#define MAX_CPU_ID 255
xc_physinfo_t info;
char cpu_cap[128], virt_caps[128], *p;
- int i, j, max_cpu_id;
+ int i, j, max_cpu_id, nr_nodes = 0;
uint64_t free_heap;
PyObject *ret_obj, *node_to_cpu_obj, *node_to_memory_obj;
PyObject *node_to_dma32_mem_obj;
if ( p != virt_caps )
*(p-1) = '\0';
- ret_obj = Py_BuildValue("{s:i,s:i,s:i,s:i,s:i,s:l,s:l,s:l,s:i,s:s:s:s}",
- "nr_nodes", info.nr_nodes,
- "max_cpu_id", info.max_cpu_id,
- "threads_per_core", info.threads_per_core,
- "cores_per_socket", info.cores_per_socket,
- "nr_cpus", info.nr_cpus,
- "total_memory", pages_to_kib(info.total_pages),
- "free_memory", pages_to_kib(info.free_pages),
- "scrub_memory", pages_to_kib(info.scrub_pages),
- "cpu_khz", info.cpu_khz,
- "hw_caps", cpu_cap,
- "virt_caps", virt_caps);
-
max_cpu_id = info.max_cpu_id;
if ( max_cpu_id > MAX_CPU_ID )
max_cpu_id = MAX_CPU_ID;
- /* Construct node-to-cpu lists. */
+ /* Construct node-to-* lists. */
node_to_cpu_obj = PyList_New(0);
-
- /* Make a list for each node. */
- for ( i = 0; i < info.nr_nodes; i++ )
+ node_to_memory_obj = PyList_New(0);
+ node_to_dma32_mem_obj = PyList_New(0);
+ for ( i = 0; i <= info.max_node_id; i++ )
{
+ int node_exists = 0;
+ PyObject *pyint;
+
+ /* CPUs. */
PyObject *cpus = PyList_New(0);
for ( j = 0; j <= max_cpu_id; j++ )
- if ( i == map[j]) {
- PyObject *pyint = PyInt_FromLong(j);
- PyList_Append(cpus, pyint);
- Py_DECREF(pyint);
- }
+ {
+ if ( i != map[j] )
+ continue;
+ pyint = PyInt_FromLong(j);
+ PyList_Append(cpus, pyint);
+ Py_DECREF(pyint);
+ node_exists = 1;
+ }
PyList_Append(node_to_cpu_obj, cpus);
Py_DECREF(cpus);
- }
-
- node_to_memory_obj = PyList_New(0);
-
- for ( i = 0; i < info.nr_nodes; i++ )
- {
- PyObject *pyint;
+ /* Memory. */
xc_availheap(self->xc_handle, 0, 0, i, &free_heap);
+ node_exists = node_exists || (free_heap != 0);
pyint = PyInt_FromLong(free_heap / 1024);
PyList_Append(node_to_memory_obj, pyint);
Py_DECREF(pyint);
- }
- /* DMA memory. */
- node_to_dma32_mem_obj = PyList_New(0);
-
- for ( i = 0; i < info.nr_nodes; i++ )
- {
- PyObject *pyint;
+ /* DMA memory. */
xc_availheap(self->xc_handle, 0, 32, i, &free_heap);
pyint = PyInt_FromLong(free_heap / 1024);
PyList_Append(node_to_dma32_mem_obj, pyint);
Py_DECREF(pyint);
+
+ if ( node_exists )
+ nr_nodes++;
}
+ ret_obj = Py_BuildValue("{s:i,s:i,s:i,s:i,s:i,s:i,s:l,s:l,s:l,s:i,s:s:s:s}",
+ "nr_nodes", nr_nodes,
+ "max_node_id", info.max_node_id,
+ "max_cpu_id", info.max_cpu_id,
+ "threads_per_core", info.threads_per_core,
+ "cores_per_socket", info.cores_per_socket,
+ "nr_cpus", info.nr_cpus,
+ "total_memory", pages_to_kib(info.total_pages),
+ "free_memory", pages_to_kib(info.free_pages),
+ "scrub_memory", pages_to_kib(info.scrub_pages),
+ "cpu_khz", info.cpu_khz,
+ "hw_caps", cpu_cap,
+ "virt_caps", virt_caps);
PyDict_SetItemString(ret_obj, "node_to_cpu", node_to_cpu_obj);
Py_DECREF(node_to_cpu_obj);
PyDict_SetItemString(ret_obj, "node_to_memory", node_to_memory_obj);
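For reference, each entry in a Py_BuildValue dictionary consumes one key/value format pair, so adding the max_node_id field also needs one extra "s:i" in the format string above. A minimal illustrative sketch, not part of the patch (the function name is made up):

#include <Python.h>

/* Illustrative only: every dict entry needs its own format pair;
 * "s:i" takes a key string plus a C int, "s:s" a key plus a C string. */
static PyObject *build_example_dict(void)
{
    return Py_BuildValue("{s:i,s:s}",
                         "nr_nodes", 2,        /* "s:i" -> C int    */
                         "virt_caps", "hvm");  /* "s:s" -> C string */
}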
*
* int first_node(mask) Number lowest set bit, or MAX_NUMNODES
* int next_node(node, mask) Next node past 'node', or MAX_NUMNODES
+ * int last_node(mask) Number highest set bit, or MAX_NUMNODES
* int first_unset_node(mask) First node not set in mask, or
* MAX_NUMNODES.
*
/* FIXME: better would be to fix all architectures to never return
> MAX_NUMNODES, then the silly min_ts could be dropped. */
-#define first_node(src) __first_node(&(src))
-static inline int __first_node(const nodemask_t *srcp)
+#define first_node(src) __first_node(&(src), MAX_NUMNODES)
+static inline int __first_node(const nodemask_t *srcp, int nbits)
{
- return min_t(int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES));
+ return min_t(int, nbits, find_first_bit(srcp->bits, nbits));
}
-#define next_node(n, src) __next_node((n), &(src))
-static inline int __next_node(int n, const nodemask_t *srcp)
+#define next_node(n, src) __next_node((n), &(src), MAX_NUMNODES)
+static inline int __next_node(int n, const nodemask_t *srcp, int nbits)
{
- return min_t(int,MAX_NUMNODES,find_next_bit(srcp->bits, MAX_NUMNODES, n+1));
+ return min_t(int, nbits, find_next_bit(srcp->bits, nbits, n+1));
+}
+
+#define last_node(src) __last_node(&(src), MAX_NUMNODES)
+static inline int __last_node(const nodemask_t *srcp, int nbits)
+{
+ int node, pnode = nbits;
+ for (node = __first_node(srcp, nbits);
+ node < nbits;
+ node = __next_node(node, srcp, nbits))
+ pnode = node;
+ return pnode;
}
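Both __first_node() and __last_node() return MAX_NUMNODES for an empty mask, so that value acts as the "invalid node" sentinel described in the comment block above. An illustrative sketch of how a caller might combine the accessors (nodemask_span() is a hypothetical helper, not part of this patch):

/* Hypothetical helper: number of node IDs spanned from the lowest to the
 * highest set bit in the mask, or 0 for an empty mask. */
static inline int nodemask_span(const nodemask_t *mask)
{
    int first = __first_node(mask, MAX_NUMNODES);

    if ( first == MAX_NUMNODES )        /* empty mask */
        return 0;
    return __last_node(mask, MAX_NUMNODES) - first + 1;
}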
#define nodemask_of_node(node) \